In [1]:
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf

keras = tf.keras
In [2]:
def plot_series(time, series, format="-", start=0, end=None, label=None):
    plt.plot(time[start:end], series[start:end], format, label=label)
    plt.xlabel("Time")
    plt.ylabel("Value")
    if label:
        plt.legend(fontsize=14)
    plt.grid(True)
    
def trend(time, slope=0):
    return slope * time
  
  
def seasonal_pattern(season_time):
    """Just an arbitrary pattern; change it if you wish."""
    return np.where(season_time < 0.4,
                    np.cos(season_time * 2 * np.pi),
                    1 / np.exp(3 * season_time))

  
def seasonality(time, period, amplitude=1, phase=0):
    """Repeats the same pattern at each period"""
    season_time = ((time + phase) % period) / period
    return amplitude * seasonal_pattern(season_time)
  
  
def white_noise(time, noise_level=1, seed=None):
    rnd = np.random.RandomState(seed)
    return rnd.randn(len(time)) * noise_level
  
  
def window_dataset(series, window_size, batch_size=32,
                   shuffle_buffer=1000):
    """Turns a series into shuffled batches of (window, next value) pairs."""
    dataset = tf.data.Dataset.from_tensor_slices(series)
    dataset = dataset.window(window_size + 1, shift=1, drop_remainder=True)
    dataset = dataset.flat_map(lambda window: window.batch(window_size + 1))
    dataset = dataset.shuffle(shuffle_buffer)
    dataset = dataset.map(lambda window: (window[:-1], window[-1]))
    dataset = dataset.batch(batch_size).prefetch(1)
    return dataset


def model_forecast(model, series, window_size):
    """Runs the model over every window of the series and returns its predictions."""
    ds = tf.data.Dataset.from_tensor_slices(series)
    ds = ds.window(window_size, shift=1, drop_remainder=True)
    ds = ds.flat_map(lambda w: w.batch(window_size))
    ds = ds.batch(32).prefetch(1)
    forecast = model.predict(ds)
    return forecast
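
To make the windowing concrete, here is a minimal sketch (not part of the original notebook) that prints the batches window_dataset produces on a toy series: each element pairs a window of window_size values with the value that immediately follows it.

# Hedged illustration, assuming the helper functions above have been run:
for x_batch, y_batch in window_dataset(np.arange(10.0), window_size=3, batch_size=2):
    print("windows:", x_batch.numpy())
    print("targets:", y_batch.numpy())
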
In [3]:
time = np.arange(4 * 365 + 1)

slope = 0.05
baseline = 10
amplitude = 40
series = baseline + trend(time, slope) + seasonality(time, period=365, amplitude=amplitude)

noise_level = 5
noise = white_noise(time, noise_level, seed=42)

series += noise

plt.figure(figsize=(10, 6))
plot_series(time, series)
plt.show()
In [4]:
split_time = 1000
time_train = time[:split_time]
x_train = series[:split_time]
time_valid = time[split_time:]
x_valid = series[split_time:]

Simple RNN Forecasting

In [5]:
keras.backend.clear_session()
tf.random.set_seed(42)
np.random.seed(42)

window_size = 30
train_set = window_dataset(x_train, window_size, batch_size=128)

model = keras.models.Sequential([
  # add a feature dimension so the RNN sees inputs of shape [batch, time, 1]
  keras.layers.Lambda(lambda x: tf.expand_dims(x, axis=-1),
                      input_shape=[None]),
  keras.layers.SimpleRNN(100, return_sequences=True),
  keras.layers.SimpleRNN(100),
  keras.layers.Dense(1),
  # scale the output up to the magnitude of the series values
  keras.layers.Lambda(lambda x: x * 200.0)
])
lr_schedule = keras.callbacks.LearningRateScheduler(
    lambda epoch: 1e-7 * 10**(epoch / 20))
optimizer = keras.optimizers.SGD(lr=1e-7, momentum=0.9)
model.compile(loss=keras.losses.Huber(),
              optimizer=optimizer,
              metrics=["mae"])
history = model.fit(train_set, epochs=100, callbacks=[lr_schedule])
Epoch 1/100
8/8 [==============================] - 0s 56ms/step - loss: 50.2172 - mae: 50.7170
Epoch 2/100
8/8 [==============================] - 0s 58ms/step - loss: 23.0009 - mae: 23.4933
Epoch 3/100
8/8 [==============================] - 0s 56ms/step - loss: 19.2074 - mae: 19.7004
Epoch 4/100
8/8 [==============================] - 0s 50ms/step - loss: 16.9702 - mae: 17.4606
Epoch 5/100
8/8 [==============================] - 0s 52ms/step - loss: 15.4094 - mae: 15.8988
Epoch 6/100
8/8 [==============================] - 0s 53ms/step - loss: 14.7507 - mae: 15.2367
Epoch 7/100
8/8 [==============================] - 0s 57ms/step - loss: 14.3058 - mae: 14.7931
Epoch 8/100
8/8 [==============================] - 0s 54ms/step - loss: 14.0615 - mae: 14.5512
Epoch 9/100
8/8 [==============================] - 0s 51ms/step - loss: 13.7812 - mae: 14.2687
Epoch 10/100
8/8 [==============================] - 0s 52ms/step - loss: 13.5640 - mae: 14.0522
Epoch 11/100
8/8 [==============================] - 0s 54ms/step - loss: 13.4094 - mae: 13.8972
Epoch 12/100
8/8 [==============================] - 0s 57ms/step - loss: 13.0179 - mae: 13.5048
Epoch 13/100
8/8 [==============================] - 0s 54ms/step - loss: 12.8761 - mae: 13.3629
Epoch 14/100
8/8 [==============================] - 0s 56ms/step - loss: 12.3934 - mae: 12.8809
Epoch 15/100
8/8 [==============================] - 0s 53ms/step - loss: 12.0738 - mae: 12.5614
Epoch 16/100
8/8 [==============================] - 0s 54ms/step - loss: 11.8488 - mae: 12.3347
Epoch 17/100
8/8 [==============================] - 0s 56ms/step - loss: 11.4459 - mae: 11.9320
Epoch 18/100
8/8 [==============================] - 0s 53ms/step - loss: 11.1943 - mae: 11.6819
Epoch 19/100
8/8 [==============================] - 0s 57ms/step - loss: 10.9563 - mae: 11.4441
Epoch 20/100
8/8 [==============================] - 0s 54ms/step - loss: 10.6491 - mae: 11.1317
Epoch 21/100
8/8 [==============================] - 0s 56ms/step - loss: 10.3188 - mae: 10.8052
Epoch 22/100
8/8 [==============================] - 0s 56ms/step - loss: 10.3916 - mae: 10.8774
Epoch 23/100
8/8 [==============================] - 0s 57ms/step - loss: 9.8752 - mae: 10.3596
Epoch 24/100
8/8 [==============================] - 0s 55ms/step - loss: 10.1089 - mae: 10.5951
Epoch 25/100
8/8 [==============================] - 0s 58ms/step - loss: 9.8868 - mae: 10.3706
Epoch 26/100
8/8 [==============================] - 0s 57ms/step - loss: 11.5286 - mae: 12.0164
Epoch 27/100
8/8 [==============================] - 0s 57ms/step - loss: 10.3070 - mae: 10.7948
Epoch 28/100
8/8 [==============================] - 0s 53ms/step - loss: 9.2488 - mae: 9.7363
Epoch 29/100
8/8 [==============================] - 0s 51ms/step - loss: 10.3132 - mae: 10.8018
Epoch 30/100
8/8 [==============================] - 0s 56ms/step - loss: 14.9935 - mae: 15.4873
Epoch 31/100
8/8 [==============================] - 0s 52ms/step - loss: 20.2741 - mae: 20.7714
Epoch 32/100
8/8 [==============================] - 0s 52ms/step - loss: 19.4971 - mae: 19.9944
Epoch 33/100
8/8 [==============================] - 0s 53ms/step - loss: 19.0401 - mae: 19.5383
Epoch 34/100
8/8 [==============================] - 0s 54ms/step - loss: 19.7637 - mae: 20.2594
Epoch 35/100
8/8 [==============================] - 0s 49ms/step - loss: 19.3049 - mae: 19.8015
Epoch 36/100
8/8 [==============================] - 0s 52ms/step - loss: 19.4934 - mae: 19.9908
Epoch 37/100
8/8 [==============================] - 0s 54ms/step - loss: 16.0339 - mae: 16.5288
Epoch 38/100
8/8 [==============================] - 0s 52ms/step - loss: 20.8350 - mae: 21.3333
Epoch 39/100
8/8 [==============================] - 0s 54ms/step - loss: 23.6088 - mae: 24.1081
Epoch 40/100
8/8 [==============================] - 0s 52ms/step - loss: 23.3312 - mae: 23.8284
Epoch 41/100
8/8 [==============================] - 0s 50ms/step - loss: 23.7148 - mae: 24.2112
Epoch 42/100
8/8 [==============================] - 0s 53ms/step - loss: 38.6681 - mae: 39.1675
Epoch 43/100
8/8 [==============================] - 0s 52ms/step - loss: 51.5058 - mae: 52.0051
Epoch 44/100
8/8 [==============================] - 0s 51ms/step - loss: 48.6449 - mae: 49.1442
Epoch 45/100
8/8 [==============================] - 0s 53ms/step - loss: 52.7117 - mae: 53.2111
Epoch 46/100
8/8 [==============================] - 0s 53ms/step - loss: 57.5515 - mae: 58.0499
Epoch 47/100
8/8 [==============================] - 0s 53ms/step - loss: 29.7483 - mae: 30.2439
Epoch 48/100
8/8 [==============================] - 0s 51ms/step - loss: 16.7614 - mae: 17.2563
Epoch 49/100
8/8 [==============================] - 0s 54ms/step - loss: 76.8095 - mae: 77.3089
Epoch 50/100
8/8 [==============================] - 0s 54ms/step - loss: 39.4904 - mae: 39.9877
Epoch 51/100
8/8 [==============================] - 0s 51ms/step - loss: 41.1710 - mae: 41.6686
Epoch 52/100
8/8 [==============================] - 0s 52ms/step - loss: 25.2406 - mae: 25.7370
Epoch 53/100
8/8 [==============================] - 0s 53ms/step - loss: 83.4479 - mae: 83.9463
Epoch 54/100
8/8 [==============================] - 0s 57ms/step - loss: 121.7076 - mae: 122.2065
Epoch 55/100
8/8 [==============================] - 0s 52ms/step - loss: 56.8595 - mae: 57.3573
Epoch 56/100
8/8 [==============================] - 0s 54ms/step - loss: 61.8003 - mae: 62.2986
Epoch 57/100
8/8 [==============================] - 0s 55ms/step - loss: 60.8980 - mae: 61.3977
Epoch 58/100
8/8 [==============================] - 0s 56ms/step - loss: 60.5514 - mae: 61.0514
Epoch 59/100
8/8 [==============================] - 0s 57ms/step - loss: 64.1935 - mae: 64.6935
Epoch 60/100
8/8 [==============================] - 0s 52ms/step - loss: 76.0601 - mae: 76.5600
Epoch 61/100
8/8 [==============================] - 0s 52ms/step - loss: 84.7288 - mae: 85.2288
Epoch 62/100
8/8 [==============================] - 0s 50ms/step - loss: 99.1168 - mae: 99.6168
Epoch 63/100
8/8 [==============================] - 0s 53ms/step - loss: 106.6997 - mae: 107.1997
Epoch 64/100
8/8 [==============================] - 0s 52ms/step - loss: 122.0110 - mae: 122.5109
Epoch 65/100
8/8 [==============================] - 0s 54ms/step - loss: 180.7567 - mae: 181.2567
Epoch 66/100
8/8 [==============================] - 0s 53ms/step - loss: 207.0638 - mae: 207.5638
Epoch 67/100
8/8 [==============================] - 0s 52ms/step - loss: 281.5504 - mae: 282.0504
Epoch 68/100
8/8 [==============================] - 0s 51ms/step - loss: 301.8343 - mae: 302.3338
Epoch 69/100
8/8 [==============================] - 0s 51ms/step - loss: 401.2092 - mae: 401.7092
Epoch 70/100
8/8 [==============================] - 0s 52ms/step - loss: 464.7565 - mae: 465.2565
Epoch 71/100
8/8 [==============================] - 0s 58ms/step - loss: 559.5812 - mae: 560.0812
Epoch 72/100
8/8 [==============================] - 0s 53ms/step - loss: 664.8901 - mae: 665.3901
Epoch 73/100
8/8 [==============================] - 0s 54ms/step - loss: 753.6339 - mae: 754.1339
Epoch 74/100
8/8 [==============================] - 0s 53ms/step - loss: 847.2518 - mae: 847.7518
Epoch 75/100
8/8 [==============================] - 0s 55ms/step - loss: 952.5220 - mae: 953.0220
Epoch 76/100
8/8 [==============================] - 0s 50ms/step - loss: 1048.4165 - mae: 1048.9165
Epoch 77/100
8/8 [==============================] - 0s 51ms/step - loss: 1317.4973 - mae: 1317.9973
Epoch 78/100
8/8 [==============================] - 0s 54ms/step - loss: 1787.5543 - mae: 1788.0533
Epoch 79/100
8/8 [==============================] - 0s 52ms/step - loss: 2753.4612 - mae: 2753.9612
Epoch 80/100
8/8 [==============================] - 0s 53ms/step - loss: 2293.1047 - mae: 2293.6047
Epoch 81/100
8/8 [==============================] - 0s 51ms/step - loss: 4103.0674 - mae: 4103.5674
Epoch 82/100
8/8 [==============================] - 0s 51ms/step - loss: 2903.2427 - mae: 2903.7427
Epoch 83/100
8/8 [==============================] - 0s 56ms/step - loss: 2525.4575 - mae: 2525.9575
Epoch 84/100
8/8 [==============================] - 0s 53ms/step - loss: 2977.4141 - mae: 2977.9141
Epoch 85/100
8/8 [==============================] - 0s 53ms/step - loss: 3359.5276 - mae: 3360.0276
Epoch 86/100
8/8 [==============================] - 0s 49ms/step - loss: 3621.2993 - mae: 3621.7993
Epoch 87/100
8/8 [==============================] - 0s 50ms/step - loss: 4661.4785 - mae: 4661.9785
Epoch 88/100
8/8 [==============================] - 0s 57ms/step - loss: 3644.8862 - mae: 3645.3862
Epoch 89/100
8/8 [==============================] - 0s 56ms/step - loss: 5805.4824 - mae: 5805.9824
Epoch 90/100
8/8 [==============================] - 0s 54ms/step - loss: 5699.4951 - mae: 5699.9951
Epoch 91/100
8/8 [==============================] - 0s 54ms/step - loss: 6602.1870 - mae: 6602.6870
Epoch 92/100
8/8 [==============================] - 0s 57ms/step - loss: 6929.2568 - mae: 6929.7568
Epoch 93/100
8/8 [==============================] - 0s 55ms/step - loss: 8160.5728 - mae: 8161.0728
Epoch 94/100
8/8 [==============================] - 0s 54ms/step - loss: 10142.0850 - mae: 10142.5850
Epoch 95/100
8/8 [==============================] - 0s 51ms/step - loss: 11346.2910 - mae: 11346.7910
Epoch 96/100
8/8 [==============================] - 0s 56ms/step - loss: 13970.3115 - mae: 13970.8115
Epoch 97/100
8/8 [==============================] - 0s 50ms/step - loss: 26259.7891 - mae: 26260.2891
Epoch 98/100
8/8 [==============================] - 0s 53ms/step - loss: 7772.9980 - mae: 7773.4980
Epoch 99/100
8/8 [==============================] - 0s 55ms/step - loss: 37133.1445 - mae: 37133.6445
Epoch 100/100
8/8 [==============================] - 0s 54ms/step - loss: 59225.3398 - mae: 59225.8438
In [6]:
plt.semilogx(history.history["lr"], history.history["loss"])
plt.axis([1e-7, 1e-4, 0, 30])
Out[6]:
(1e-07, 0.0001, 0.0, 30.0)
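
The loss in the log above bottoms out around epoch 28, which under this schedule corresponds to a learning rate of roughly 2e-6, and diverges soon after. As a hedged sketch (not part of the original notebook, assuming the history object above is still in scope), the same reading can be taken programmatically; a rate slightly below the minimum, such as the 1.5e-6 used in the next cell, is the safer choice.

# Hedged sketch: find the learning rate with the lowest recorded training loss.
lrs = np.array(history.history["lr"])
losses = np.array(history.history["loss"])
print("loss bottoms out near lr =", lrs[np.argmin(losses)])
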
In [7]:
keras.backend.clear_session()
tf.random.set_seed(42)
np.random.seed(42)

window_size = 30
train_set = window_dataset(x_train, window_size, batch_size=128)
valid_set = window_dataset(x_valid, window_size, batch_size=128)

model = keras.models.Sequential([
  keras.layers.Lambda(lambda x: tf.expand_dims(x, axis=-1),
                      input_shape=[None]),
  keras.layers.SimpleRNN(100, return_sequences=True),
  keras.layers.SimpleRNN(100),
  keras.layers.Dense(1),
  keras.layers.Lambda(lambda x: x * 200.0)
])
optimizer = keras.optimizers.SGD(lr=1.5e-6, momentum=0.9)
model.compile(loss=keras.losses.Huber(),
              optimizer=optimizer,
              metrics=["mae"])
early_stopping = keras.callbacks.EarlyStopping(patience=50)
model_checkpoint = keras.callbacks.ModelCheckpoint(
    "my_checkpoint", save_best_only=True)
model.fit(train_set, epochs=500,
          validation_data=valid_set,
          callbacks=[early_stopping, model_checkpoint])
Epoch 1/500
      8/Unknown - 0s 53ms/step - loss: 54.8857 - mae: 55.3843INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 498ms/step - loss: 54.8857 - mae: 55.3843 - val_loss: 53.7733 - val_mae: 54.2733
Epoch 2/500
8/8 [==============================] - ETA: 0s - loss: 35.4596 - mae: 35.9565INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 452ms/step - loss: 35.4596 - mae: 35.9565 - val_loss: 17.5201 - val_mae: 18.0163
Epoch 3/500
8/8 [==============================] - 1s 64ms/step - loss: 22.5446 - mae: 23.0386 - val_loss: 18.8170 - val_mae: 19.3139
Epoch 4/500
8/8 [==============================] - 1s 65ms/step - loss: 16.2156 - mae: 16.7054 - val_loss: 20.0784 - val_mae: 20.5771
Epoch 5/500
8/8 [==============================] - ETA: 0s - loss: 13.7456 - mae: 14.2346INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 462ms/step - loss: 13.7456 - mae: 14.2346 - val_loss: 6.8457 - val_mae: 7.3288
Epoch 6/500
8/8 [==============================] - 1s 66ms/step - loss: 11.3547 - mae: 11.8350 - val_loss: 7.2706 - val_mae: 7.7554
Epoch 7/500
8/8 [==============================] - 1s 68ms/step - loss: 10.7631 - mae: 11.2440 - val_loss: 11.2744 - val_mae: 11.7677
Epoch 8/500
8/8 [==============================] - ETA: 0s - loss: 10.3828 - mae: 10.8695INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 3s 432ms/step - loss: 10.3828 - mae: 10.8695 - val_loss: 6.3161 - val_mae: 6.8009
Epoch 9/500
8/8 [==============================] - ETA: 0s - loss: 9.3880 - mae: 9.8653 INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 481ms/step - loss: 9.3880 - mae: 9.8653 - val_loss: 5.4276 - val_mae: 5.9022
Epoch 10/500
8/8 [==============================] - ETA: 0s - loss: 9.2524 - mae: 9.7366INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 438ms/step - loss: 9.2524 - mae: 9.7366 - val_loss: 5.4024 - val_mae: 5.8834
Epoch 11/500
8/8 [==============================] - 1s 64ms/step - loss: 9.0369 - mae: 9.5203 - val_loss: 6.5509 - val_mae: 7.0338
Epoch 12/500
8/8 [==============================] - 1s 70ms/step - loss: 8.7484 - mae: 9.2316 - val_loss: 6.1082 - val_mae: 6.5865
Epoch 13/500
8/8 [==============================] - 1s 67ms/step - loss: 8.3674 - mae: 8.8468 - val_loss: 6.2307 - val_mae: 6.7115
Epoch 14/500
8/8 [==============================] - 1s 67ms/step - loss: 8.3383 - mae: 8.8220 - val_loss: 5.7183 - val_mae: 6.2005
Epoch 15/500
8/8 [==============================] - 1s 68ms/step - loss: 8.4829 - mae: 8.9644 - val_loss: 6.1669 - val_mae: 6.6478
Epoch 16/500
8/8 [==============================] - 1s 69ms/step - loss: 7.8410 - mae: 8.3221 - val_loss: 7.1299 - val_mae: 7.6139
Epoch 17/500
8/8 [==============================] - 1s 70ms/step - loss: 7.9633 - mae: 8.4470 - val_loss: 7.2034 - val_mae: 7.6887
Epoch 18/500
8/8 [==============================] - 1s 67ms/step - loss: 7.4940 - mae: 7.9735 - val_loss: 6.2201 - val_mae: 6.7029
Epoch 19/500
8/8 [==============================] - 1s 72ms/step - loss: 7.4794 - mae: 7.9629 - val_loss: 5.8512 - val_mae: 6.3344
Epoch 20/500
8/8 [==============================] - 1s 67ms/step - loss: 7.2516 - mae: 7.7335 - val_loss: 5.8174 - val_mae: 6.2992
Epoch 21/500
8/8 [==============================] - 1s 67ms/step - loss: 7.2803 - mae: 7.7614 - val_loss: 5.6090 - val_mae: 6.0905
Epoch 22/500
8/8 [==============================] - ETA: 0s - loss: 7.1217 - mae: 7.5991INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 472ms/step - loss: 7.1217 - mae: 7.5991 - val_loss: 5.2088 - val_mae: 5.6917
Epoch 23/500
8/8 [==============================] - 1s 66ms/step - loss: 7.3137 - mae: 7.7910 - val_loss: 5.3233 - val_mae: 5.8034
Epoch 24/500
8/8 [==============================] - 1s 68ms/step - loss: 7.2679 - mae: 7.7480 - val_loss: 5.9504 - val_mae: 6.4389
Epoch 25/500
8/8 [==============================] - 1s 68ms/step - loss: 7.6199 - mae: 8.1031 - val_loss: 6.5998 - val_mae: 7.0801
Epoch 26/500
8/8 [==============================] - 1s 68ms/step - loss: 6.8836 - mae: 7.3655 - val_loss: 6.4157 - val_mae: 6.8921
Epoch 27/500
8/8 [==============================] - 1s 67ms/step - loss: 6.5925 - mae: 7.0718 - val_loss: 5.8379 - val_mae: 6.3249
Epoch 28/500
8/8 [==============================] - 1s 69ms/step - loss: 7.0310 - mae: 7.5138 - val_loss: 5.6762 - val_mae: 6.1613
Epoch 29/500
8/8 [==============================] - 1s 68ms/step - loss: 6.6602 - mae: 7.1395 - val_loss: 5.6041 - val_mae: 6.0879
Epoch 30/500
8/8 [==============================] - 1s 76ms/step - loss: 7.0822 - mae: 7.5634 - val_loss: 8.3001 - val_mae: 8.7916
Epoch 31/500
8/8 [==============================] - 1s 70ms/step - loss: 6.4865 - mae: 6.9620 - val_loss: 6.3714 - val_mae: 6.8489
Epoch 32/500
8/8 [==============================] - 1s 67ms/step - loss: 6.1872 - mae: 6.6608 - val_loss: 5.2598 - val_mae: 5.7392
Epoch 33/500
8/8 [==============================] - 1s 66ms/step - loss: 6.5049 - mae: 6.9888 - val_loss: 5.2490 - val_mae: 5.7278
Epoch 34/500
8/8 [==============================] - 1s 66ms/step - loss: 6.4857 - mae: 6.9655 - val_loss: 5.4382 - val_mae: 5.9172
Epoch 35/500
8/8 [==============================] - 1s 71ms/step - loss: 6.0975 - mae: 6.5742 - val_loss: 5.3990 - val_mae: 5.8758
Epoch 36/500
8/8 [==============================] - 1s 67ms/step - loss: 6.0247 - mae: 6.5033 - val_loss: 5.3508 - val_mae: 5.8272
Epoch 37/500
8/8 [==============================] - ETA: 0s - loss: 5.9693 - mae: 6.4466INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 441ms/step - loss: 5.9693 - mae: 6.4466 - val_loss: 5.0254 - val_mae: 5.5085
Epoch 38/500
8/8 [==============================] - ETA: 0s - loss: 6.2703 - mae: 6.7460INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 472ms/step - loss: 6.2703 - mae: 6.7460 - val_loss: 4.9862 - val_mae: 5.4679
Epoch 39/500
8/8 [==============================] - 1s 68ms/step - loss: 6.3089 - mae: 6.7863 - val_loss: 5.0001 - val_mae: 5.4845
Epoch 40/500
8/8 [==============================] - 1s 70ms/step - loss: 6.6672 - mae: 7.1476 - val_loss: 5.5822 - val_mae: 6.0633
Epoch 41/500
8/8 [==============================] - 1s 66ms/step - loss: 6.3526 - mae: 6.8320 - val_loss: 6.4416 - val_mae: 6.9244
Epoch 42/500
8/8 [==============================] - 1s 66ms/step - loss: 5.9507 - mae: 6.4282 - val_loss: 5.0355 - val_mae: 5.5096
Epoch 43/500
8/8 [==============================] - 1s 69ms/step - loss: 6.1178 - mae: 6.5932 - val_loss: 5.0035 - val_mae: 5.4783
Epoch 44/500
8/8 [==============================] - ETA: 0s - loss: 6.0756 - mae: 6.5563INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 463ms/step - loss: 6.0756 - mae: 6.5563 - val_loss: 4.9765 - val_mae: 5.4604
Epoch 45/500
8/8 [==============================] - 1s 73ms/step - loss: 6.1126 - mae: 6.5907 - val_loss: 4.9838 - val_mae: 5.4653
Epoch 46/500
8/8 [==============================] - 1s 73ms/step - loss: 5.9622 - mae: 6.4383 - val_loss: 5.1972 - val_mae: 5.6715
Epoch 47/500
8/8 [==============================] - 1s 70ms/step - loss: 7.1638 - mae: 7.6500 - val_loss: 6.7112 - val_mae: 7.1967
Epoch 48/500
8/8 [==============================] - 1s 68ms/step - loss: 7.4840 - mae: 7.9712 - val_loss: 10.5735 - val_mae: 11.0683
Epoch 49/500
8/8 [==============================] - 1s 69ms/step - loss: 6.8847 - mae: 7.3676 - val_loss: 6.1744 - val_mae: 6.6537
Epoch 50/500
8/8 [==============================] - 1s 66ms/step - loss: 6.6974 - mae: 7.1794 - val_loss: 5.0917 - val_mae: 5.5721
Epoch 51/500
8/8 [==============================] - 1s 67ms/step - loss: 7.0775 - mae: 7.5633 - val_loss: 5.2916 - val_mae: 5.7698
Epoch 52/500
8/8 [==============================] - 1s 70ms/step - loss: 6.4534 - mae: 6.9357 - val_loss: 5.8080 - val_mae: 6.2869
Epoch 53/500
8/8 [==============================] - 1s 67ms/step - loss: 6.2476 - mae: 6.7317 - val_loss: 9.4458 - val_mae: 9.9417
Epoch 54/500
8/8 [==============================] - 1s 66ms/step - loss: 6.6254 - mae: 7.1118 - val_loss: 5.1425 - val_mae: 5.6192
Epoch 55/500
8/8 [==============================] - 1s 68ms/step - loss: 6.7725 - mae: 7.2525 - val_loss: 5.7713 - val_mae: 6.2561
Epoch 56/500
8/8 [==============================] - 1s 73ms/step - loss: 6.9321 - mae: 7.4200 - val_loss: 5.7383 - val_mae: 6.2184
Epoch 57/500
8/8 [==============================] - 1s 69ms/step - loss: 5.8178 - mae: 6.2985 - val_loss: 5.1172 - val_mae: 5.5917
Epoch 58/500
8/8 [==============================] - 1s 69ms/step - loss: 6.0433 - mae: 6.5190 - val_loss: 7.4305 - val_mae: 7.9154
Epoch 59/500
8/8 [==============================] - 1s 73ms/step - loss: 5.7946 - mae: 6.2723 - val_loss: 6.6449 - val_mae: 7.1292
Epoch 60/500
8/8 [==============================] - 1s 70ms/step - loss: 5.6307 - mae: 6.1074 - val_loss: 5.5807 - val_mae: 6.0650
Epoch 61/500
8/8 [==============================] - 1s 69ms/step - loss: 5.4900 - mae: 5.9733 - val_loss: 5.1648 - val_mae: 5.6430
Epoch 62/500
8/8 [==============================] - 1s 74ms/step - loss: 5.7959 - mae: 6.2787 - val_loss: 6.7673 - val_mae: 7.2547
Epoch 63/500
8/8 [==============================] - 1s 67ms/step - loss: 5.8491 - mae: 6.3292 - val_loss: 5.5542 - val_mae: 6.0376
Epoch 64/500
8/8 [==============================] - 1s 69ms/step - loss: 6.0058 - mae: 6.4879 - val_loss: 5.3563 - val_mae: 5.8396
Epoch 65/500
8/8 [==============================] - 1s 66ms/step - loss: 6.2939 - mae: 6.7773 - val_loss: 5.0565 - val_mae: 5.5311
Epoch 66/500
8/8 [==============================] - 1s 69ms/step - loss: 5.7496 - mae: 6.2316 - val_loss: 6.1683 - val_mae: 6.6487
Epoch 67/500
8/8 [==============================] - ETA: 0s - loss: 5.4430 - mae: 5.9245INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 441ms/step - loss: 5.4430 - mae: 5.9245 - val_loss: 4.9655 - val_mae: 5.4472
Epoch 68/500
8/8 [==============================] - 1s 69ms/step - loss: 5.5585 - mae: 6.0357 - val_loss: 5.0738 - val_mae: 5.5518
Epoch 69/500
8/8 [==============================] - 1s 72ms/step - loss: 5.8952 - mae: 6.3778 - val_loss: 5.3735 - val_mae: 5.8562
Epoch 70/500
8/8 [==============================] - 1s 67ms/step - loss: 6.3722 - mae: 6.8537 - val_loss: 8.4078 - val_mae: 8.8985
Epoch 71/500
8/8 [==============================] - 1s 67ms/step - loss: 5.9510 - mae: 6.4321 - val_loss: 5.0425 - val_mae: 5.5215
Epoch 72/500
8/8 [==============================] - 1s 67ms/step - loss: 5.4763 - mae: 5.9503 - val_loss: 5.0605 - val_mae: 5.5426
Epoch 73/500
8/8 [==============================] - 1s 68ms/step - loss: 5.8305 - mae: 6.3058 - val_loss: 5.1049 - val_mae: 5.5825
Epoch 74/500
8/8 [==============================] - 1s 69ms/step - loss: 5.6705 - mae: 6.1504 - val_loss: 7.8165 - val_mae: 8.3056
Epoch 75/500
8/8 [==============================] - 1s 71ms/step - loss: 5.9062 - mae: 6.3867 - val_loss: 6.9281 - val_mae: 7.4137
Epoch 76/500
8/8 [==============================] - 1s 71ms/step - loss: 5.5544 - mae: 6.0323 - val_loss: 5.3406 - val_mae: 5.8189
Epoch 77/500
8/8 [==============================] - 1s 69ms/step - loss: 5.4786 - mae: 5.9571 - val_loss: 7.4993 - val_mae: 7.9862
Epoch 78/500
8/8 [==============================] - 1s 67ms/step - loss: 5.9443 - mae: 6.4283 - val_loss: 5.0673 - val_mae: 5.5435
Epoch 79/500
8/8 [==============================] - 1s 69ms/step - loss: 6.6319 - mae: 7.1174 - val_loss: 5.0747 - val_mae: 5.5573
Epoch 80/500
8/8 [==============================] - 1s 68ms/step - loss: 5.7919 - mae: 6.2716 - val_loss: 7.9725 - val_mae: 8.4613
Epoch 81/500
8/8 [==============================] - 1s 70ms/step - loss: 5.4489 - mae: 5.9272 - val_loss: 6.9996 - val_mae: 7.4858
Epoch 82/500
8/8 [==============================] - 1s 67ms/step - loss: 5.3073 - mae: 5.7876 - val_loss: 6.3977 - val_mae: 6.8797
Epoch 83/500
8/8 [==============================] - 1s 72ms/step - loss: 5.2079 - mae: 5.6897 - val_loss: 5.0611 - val_mae: 5.5427
Epoch 84/500
8/8 [==============================] - 1s 70ms/step - loss: 5.1165 - mae: 5.5954 - val_loss: 5.0673 - val_mae: 5.5487
Epoch 85/500
8/8 [==============================] - 1s 68ms/step - loss: 5.1233 - mae: 5.5969 - val_loss: 5.1984 - val_mae: 5.6756
Epoch 86/500
8/8 [==============================] - 1s 68ms/step - loss: 5.1349 - mae: 5.6123 - val_loss: 4.9879 - val_mae: 5.4746
Epoch 87/500
8/8 [==============================] - ETA: 0s - loss: 5.0676 - mae: 5.5474INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 469ms/step - loss: 5.0676 - mae: 5.5474 - val_loss: 4.9456 - val_mae: 5.4310
Epoch 88/500
8/8 [==============================] - 1s 68ms/step - loss: 5.6988 - mae: 6.1809 - val_loss: 4.9810 - val_mae: 5.4567
Epoch 89/500
8/8 [==============================] - 1s 68ms/step - loss: 5.6044 - mae: 6.0865 - val_loss: 5.5547 - val_mae: 6.0349
Epoch 90/500
8/8 [==============================] - 1s 66ms/step - loss: 5.0368 - mae: 5.5088 - val_loss: 5.2880 - val_mae: 5.7699
Epoch 91/500
8/8 [==============================] - 1s 70ms/step - loss: 4.9507 - mae: 5.4262 - val_loss: 6.5973 - val_mae: 7.0799
Epoch 92/500
8/8 [==============================] - 1s 65ms/step - loss: 5.0542 - mae: 5.5284 - val_loss: 5.5571 - val_mae: 6.0364
Epoch 93/500
8/8 [==============================] - 1s 67ms/step - loss: 4.9388 - mae: 5.4120 - val_loss: 5.1258 - val_mae: 5.6041
Epoch 94/500
8/8 [==============================] - 1s 69ms/step - loss: 5.1849 - mae: 5.6616 - val_loss: 8.8054 - val_mae: 9.2980
Epoch 95/500
8/8 [==============================] - 1s 67ms/step - loss: 5.6227 - mae: 6.0971 - val_loss: 6.0098 - val_mae: 6.4888
Epoch 96/500
8/8 [==============================] - 1s 68ms/step - loss: 4.9709 - mae: 5.4468 - val_loss: 5.8050 - val_mae: 6.2818
Epoch 97/500
8/8 [==============================] - ETA: 0s - loss: 5.0279 - mae: 5.5052INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 3s 436ms/step - loss: 5.0279 - mae: 5.5052 - val_loss: 4.9141 - val_mae: 5.3892
Epoch 98/500
8/8 [==============================] - ETA: 0s - loss: 4.9666 - mae: 5.4429INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 473ms/step - loss: 4.9666 - mae: 5.4429 - val_loss: 4.9100 - val_mae: 5.3930
Epoch 99/500
8/8 [==============================] - 1s 67ms/step - loss: 4.9774 - mae: 5.4514 - val_loss: 4.9773 - val_mae: 5.4530
Epoch 100/500
8/8 [==============================] - 1s 68ms/step - loss: 5.0688 - mae: 5.5446 - val_loss: 4.9497 - val_mae: 5.4250
Epoch 101/500
8/8 [==============================] - 1s 67ms/step - loss: 4.9040 - mae: 5.3803 - val_loss: 5.6017 - val_mae: 6.0794
Epoch 102/500
8/8 [==============================] - 1s 66ms/step - loss: 5.0183 - mae: 5.4926 - val_loss: 6.4838 - val_mae: 6.9687
Epoch 103/500
8/8 [==============================] - 1s 65ms/step - loss: 5.1880 - mae: 5.6656 - val_loss: 5.9399 - val_mae: 6.4177
Epoch 104/500
8/8 [==============================] - 1s 68ms/step - loss: 4.8851 - mae: 5.3596 - val_loss: 5.7729 - val_mae: 6.2631
Epoch 105/500
8/8 [==============================] - 1s 69ms/step - loss: 5.3821 - mae: 5.8554 - val_loss: 4.9261 - val_mae: 5.4131
Epoch 106/500
8/8 [==============================] - 1s 66ms/step - loss: 4.8987 - mae: 5.3723 - val_loss: 4.9417 - val_mae: 5.4202
Epoch 107/500
8/8 [==============================] - 1s 68ms/step - loss: 4.7787 - mae: 5.2532 - val_loss: 6.7643 - val_mae: 7.2509
Epoch 108/500
8/8 [==============================] - 1s 73ms/step - loss: 4.9333 - mae: 5.4066 - val_loss: 5.0794 - val_mae: 5.5598
Epoch 109/500
8/8 [==============================] - 1s 67ms/step - loss: 4.8609 - mae: 5.3372 - val_loss: 5.9341 - val_mae: 6.4116
Epoch 110/500
8/8 [==============================] - ETA: 0s - loss: 4.8677 - mae: 5.3451INFO:tensorflow:Assets written to: my_checkpoint/assets
8/8 [==============================] - 4s 470ms/step - loss: 4.8677 - mae: 5.3451 - val_loss: 4.9018 - val_mae: 5.3773
Epoch 111/500
8/8 [==============================] - 1s 68ms/step - loss: 4.9848 - mae: 5.4589 - val_loss: 5.3269 - val_mae: 5.8103
Epoch 112/500
8/8 [==============================] - 1s 68ms/step - loss: 5.0177 - mae: 5.4949 - val_loss: 5.1846 - val_mae: 5.6698
Epoch 113/500
8/8 [==============================] - 1s 68ms/step - loss: 4.7052 - mae: 5.1779 - val_loss: 5.0072 - val_mae: 5.4805
Epoch 114/500
8/8 [==============================] - 1s 65ms/step - loss: 4.9924 - mae: 5.4697 - val_loss: 5.4819 - val_mae: 5.9591
Epoch 115/500
8/8 [==============================] - 1s 70ms/step - loss: 5.2149 - mae: 5.6951 - val_loss: 8.4177 - val_mae: 8.9080
Epoch 116/500
8/8 [==============================] - 1s 71ms/step - loss: 5.5694 - mae: 6.0533 - val_loss: 4.9969 - val_mae: 5.4694
Epoch 117/500
8/8 [==============================] - 1s 69ms/step - loss: 5.2112 - mae: 5.6912 - val_loss: 5.8994 - val_mae: 6.3907
Epoch 118/500
8/8 [==============================] - 1s 66ms/step - loss: 5.2598 - mae: 5.7377 - val_loss: 5.9525 - val_mae: 6.4277
Epoch 119/500
8/8 [==============================] - 1s 72ms/step - loss: 5.3135 - mae: 5.7963 - val_loss: 6.3334 - val_mae: 6.8156
Epoch 120/500
8/8 [==============================] - 1s 69ms/step - loss: 5.1218 - mae: 5.6007 - val_loss: 4.9825 - val_mae: 5.4659
Epoch 121/500
8/8 [==============================] - 1s 68ms/step - loss: 4.9230 - mae: 5.4008 - val_loss: 5.5417 - val_mae: 6.0267
Epoch 122/500
8/8 [==============================] - 1s 72ms/step - loss: 5.5259 - mae: 6.0096 - val_loss: 5.3483 - val_mae: 5.8313
Epoch 123/500
8/8 [==============================] - 1s 67ms/step - loss: 5.0603 - mae: 5.5404 - val_loss: 5.4107 - val_mae: 5.8931
Epoch 124/500
8/8 [==============================] - 1s 66ms/step - loss: 4.8920 - mae: 5.3722 - val_loss: 4.9513 - val_mae: 5.4383
Epoch 125/500
8/8 [==============================] - 1s 68ms/step - loss: 4.8796 - mae: 5.3609 - val_loss: 5.1420 - val_mae: 5.6219
Epoch 126/500
8/8 [==============================] - 1s 70ms/step - loss: 4.6770 - mae: 5.1532 - val_loss: 5.1612 - val_mae: 5.6437
Epoch 127/500
8/8 [==============================] - 1s 71ms/step - loss: 4.7393 - mae: 5.2199 - val_loss: 4.9283 - val_mae: 5.4133
Epoch 128/500
8/8 [==============================] - 1s 64ms/step - loss: 4.7002 - mae: 5.1784 - val_loss: 5.3664 - val_mae: 5.8474
Epoch 129/500
8/8 [==============================] - 1s 67ms/step - loss: 4.8500 - mae: 5.3240 - val_loss: 6.5757 - val_mae: 7.0580
Epoch 130/500
8/8 [==============================] - 1s 70ms/step - loss: 4.6848 - mae: 5.1605 - val_loss: 4.9521 - val_mae: 5.4268
Epoch 131/500
8/8 [==============================] - 1s 67ms/step - loss: 5.2800 - mae: 5.7531 - val_loss: 5.7696 - val_mae: 6.2572
Epoch 132/500
8/8 [==============================] - 1s 69ms/step - loss: 5.6003 - mae: 6.0815 - val_loss: 5.9903 - val_mae: 6.4688
Epoch 133/500
8/8 [==============================] - 1s 67ms/step - loss: 4.7737 - mae: 5.2488 - val_loss: 6.4225 - val_mae: 6.9043
Epoch 134/500
8/8 [==============================] - 1s 68ms/step - loss: 4.8785 - mae: 5.3526 - val_loss: 5.4586 - val_mae: 5.9373
Epoch 135/500
8/8 [==============================] - 1s 67ms/step - loss: 4.6795 - mae: 5.1576 - val_loss: 5.6240 - val_mae: 6.1014
Epoch 136/500
8/8 [==============================] - 1s 67ms/step - loss: 4.8044 - mae: 5.2785 - val_loss: 4.9365 - val_mae: 5.4245
Epoch 137/500
8/8 [==============================] - 1s 67ms/step - loss: 4.8206 - mae: 5.2969 - val_loss: 5.0082 - val_mae: 5.4845
Epoch 138/500
8/8 [==============================] - 1s 68ms/step - loss: 4.7712 - mae: 5.2487 - val_loss: 5.0349 - val_mae: 5.5089
Epoch 139/500
8/8 [==============================] - 1s 71ms/step - loss: 4.5265 - mae: 4.9986 - val_loss: 4.9584 - val_mae: 5.4356
Epoch 140/500
8/8 [==============================] - 1s 70ms/step - loss: 5.1096 - mae: 5.5899 - val_loss: 4.9829 - val_mae: 5.4644
Epoch 141/500
8/8 [==============================] - 1s 72ms/step - loss: 4.7072 - mae: 5.1828 - val_loss: 5.3064 - val_mae: 5.7884
Epoch 142/500
8/8 [==============================] - 1s 67ms/step - loss: 4.4624 - mae: 4.9319 - val_loss: 6.4503 - val_mae: 6.9322
Epoch 143/500
8/8 [==============================] - 1s 68ms/step - loss: 4.6604 - mae: 5.1331 - val_loss: 6.7923 - val_mae: 7.2773
Epoch 144/500
8/8 [==============================] - 1s 72ms/step - loss: 5.0672 - mae: 5.5472 - val_loss: 4.9931 - val_mae: 5.4676
Epoch 145/500
8/8 [==============================] - 1s 67ms/step - loss: 4.5280 - mae: 5.0032 - val_loss: 5.0017 - val_mae: 5.4770
Epoch 146/500
8/8 [==============================] - 1s 68ms/step - loss: 4.5581 - mae: 5.0336 - val_loss: 5.0172 - val_mae: 5.4885
Epoch 147/500
8/8 [==============================] - 1s 70ms/step - loss: 4.4518 - mae: 4.9248 - val_loss: 6.6479 - val_mae: 7.1297
Epoch 148/500
8/8 [==============================] - 1s 68ms/step - loss: 4.7889 - mae: 5.2636 - val_loss: 5.6386 - val_mae: 6.1175
Epoch 149/500
8/8 [==============================] - 1s 64ms/step - loss: 4.5493 - mae: 5.0241 - val_loss: 4.9823 - val_mae: 5.4560
Epoch 150/500
8/8 [==============================] - 1s 69ms/step - loss: 4.7236 - mae: 5.2000 - val_loss: 6.0009 - val_mae: 6.4782
Epoch 151/500
8/8 [==============================] - 1s 68ms/step - loss: 5.1126 - mae: 5.5922 - val_loss: 7.1999 - val_mae: 7.6906
Epoch 152/500
8/8 [==============================] - 1s 66ms/step - loss: 5.5811 - mae: 6.0627 - val_loss: 7.2074 - val_mae: 7.6981
Epoch 153/500
8/8 [==============================] - 1s 72ms/step - loss: 5.1775 - mae: 5.6603 - val_loss: 5.4814 - val_mae: 5.9606
Epoch 154/500
8/8 [==============================] - 1s 71ms/step - loss: 4.6057 - mae: 5.0829 - val_loss: 5.0336 - val_mae: 5.5064
Epoch 155/500
8/8 [==============================] - 1s 69ms/step - loss: 4.7347 - mae: 5.2129 - val_loss: 5.1808 - val_mae: 5.6616
Epoch 156/500
8/8 [==============================] - 1s 68ms/step - loss: 4.7503 - mae: 5.2277 - val_loss: 6.2516 - val_mae: 6.7315
Epoch 157/500
8/8 [==============================] - 1s 68ms/step - loss: 4.7090 - mae: 5.1840 - val_loss: 5.7430 - val_mae: 6.2191
Epoch 158/500
8/8 [==============================] - 1s 70ms/step - loss: 4.4960 - mae: 4.9674 - val_loss: 5.5493 - val_mae: 6.0290
Epoch 159/500
8/8 [==============================] - 1s 70ms/step - loss: 4.5836 - mae: 5.0614 - val_loss: 5.9668 - val_mae: 6.4452
Epoch 160/500
8/8 [==============================] - 1s 68ms/step - loss: 4.6556 - mae: 5.1280 - val_loss: 5.8042 - val_mae: 6.2809
Out[7]:
<tensorflow.python.keras.callbacks.History at 0x7f7867cd7940>
In [8]:
model = keras.models.load_model("my_checkpoint")
In [9]:
rnn_forecast = model_forecast(
    model,
    series[split_time - window_size:-1],
    window_size)[:, 0]
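
A note on the slicing above: series[split_time - window_size:-1] supplies one window ending just before each validation time step, and [:, 0] drops the singleton output dimension, so the forecast lines up one-to-one with x_valid. A minimal, hedged sanity check (not in the original notebook):

# Each validation point should have exactly one corresponding forecast.
assert rnn_forecast.shape == x_valid.shape
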
In [10]:
plt.figure(figsize=(10, 6))
plot_series(time_valid, x_valid)
plot_series(time_valid, rnn_forecast)
In [11]:
keras.metrics.mean_absolute_error(x_valid, rnn_forecast).numpy()
Out[11]:
5.3282204

Sequence-to-Sequence Forecasting

In [12]:
def seq2seq_window_dataset(series, window_size, batch_size=32,
                           shuffle_buffer=1000):
    """Yields (window, window shifted forward by one step) pairs for
    sequence-to-sequence training."""
    series = tf.expand_dims(series, axis=-1)
    ds = tf.data.Dataset.from_tensor_slices(series)
    ds = ds.window(window_size + 1, shift=1, drop_remainder=True)
    ds = ds.flat_map(lambda w: w.batch(window_size + 1))
    ds = ds.shuffle(shuffle_buffer)
    ds = ds.map(lambda w: (w[:-1], w[1:]))
    return ds.batch(batch_size).prefetch(1)
In [13]:
for X_batch, Y_batch in seq2seq_window_dataset(tf.range(10), 3,
                                               batch_size=1):
    print("X:", X_batch.numpy())
    print("Y:", Y_batch.numpy())
X: [[[1]
  [2]
  [3]]]
Y: [[[2]
  [3]
  [4]]]
X: [[[2]
  [3]
  [4]]]
Y: [[[3]
  [4]
  [5]]]
X: [[[4]
  [5]
  [6]]]
Y: [[[5]
  [6]
  [7]]]
X: [[[3]
  [4]
  [5]]]
Y: [[[4]
  [5]
  [6]]]
X: [[[5]
  [6]
  [7]]]
Y: [[[6]
  [7]
  [8]]]
X: [[[6]
  [7]
  [8]]]
Y: [[[7]
  [8]
  [9]]]
X: [[[0]
  [1]
  [2]]]
Y: [[[1]
  [2]
  [3]]]
In [14]:
keras.backend.clear_session()
tf.random.set_seed(42)
np.random.seed(42)

window_size = 30
train_set = seq2seq_window_dataset(x_train, window_size,
                                   batch_size=128)

model = keras.models.Sequential([
  keras.layers.SimpleRNN(100, return_sequences=True,
                         input_shape=[None, 1]),
  keras.layers.SimpleRNN(100, return_sequences=True),
  keras.layers.Dense(1),
  keras.layers.Lambda(lambda x: x * 200)
])
lr_schedule = keras.callbacks.LearningRateScheduler(
    lambda epoch: 1e-7 * 10**(epoch / 30))
optimizer = keras.optimizers.SGD(lr=1e-7, momentum=0.9)
model.compile(loss=keras.losses.Huber(),
              optimizer=optimizer,
              metrics=["mae"])
history = model.fit(train_set, epochs=100, callbacks=[lr_schedule])
Epoch 1/100
8/8 [==============================] - 0s 52ms/step - loss: 52.3618 - mae: 52.8614
Epoch 2/100
8/8 [==============================] - 0s 54ms/step - loss: 25.4714 - mae: 25.9671
Epoch 3/100
8/8 [==============================] - 0s 52ms/step - loss: 23.5029 - mae: 23.9972
Epoch 4/100
8/8 [==============================] - 0s 52ms/step - loss: 21.1367 - mae: 21.6294
Epoch 5/100
8/8 [==============================] - 0s 51ms/step - loss: 19.4762 - mae: 19.9674
Epoch 6/100
8/8 [==============================] - 0s 52ms/step - loss: 18.6742 - mae: 19.1647
Epoch 7/100
8/8 [==============================] - 0s 53ms/step - loss: 18.1698 - mae: 18.6587
Epoch 8/100
8/8 [==============================] - 0s 51ms/step - loss: 17.7679 - mae: 18.2568
Epoch 9/100
8/8 [==============================] - 0s 57ms/step - loss: 17.4389 - mae: 17.9280
Epoch 10/100
8/8 [==============================] - 0s 51ms/step - loss: 17.0989 - mae: 17.5875
Epoch 11/100
8/8 [==============================] - 0s 54ms/step - loss: 16.7679 - mae: 17.2569
Epoch 12/100
8/8 [==============================] - 0s 51ms/step - loss: 16.4296 - mae: 16.9185
Epoch 13/100
8/8 [==============================] - 0s 53ms/step - loss: 16.0786 - mae: 16.5664
Epoch 14/100
8/8 [==============================] - 0s 52ms/step - loss: 15.7223 - mae: 16.2095
Epoch 15/100
8/8 [==============================] - 0s 52ms/step - loss: 15.3619 - mae: 15.8493
Epoch 16/100
8/8 [==============================] - 0s 53ms/step - loss: 14.9962 - mae: 15.4842
Epoch 17/100
8/8 [==============================] - 0s 52ms/step - loss: 14.6108 - mae: 15.0986
Epoch 18/100
8/8 [==============================] - 0s 56ms/step - loss: 14.2178 - mae: 14.7051
Epoch 19/100
8/8 [==============================] - 0s 51ms/step - loss: 13.8218 - mae: 14.3086
Epoch 20/100
8/8 [==============================] - 0s 52ms/step - loss: 13.3871 - mae: 13.8738
Epoch 21/100
8/8 [==============================] - 0s 54ms/step - loss: 12.9509 - mae: 13.4372
Epoch 22/100
8/8 [==============================] - 0s 51ms/step - loss: 12.5123 - mae: 12.9977
Epoch 23/100
8/8 [==============================] - 0s 55ms/step - loss: 12.0817 - mae: 12.5669
Epoch 24/100
8/8 [==============================] - 0s 53ms/step - loss: 11.6471 - mae: 12.1312
Epoch 25/100
8/8 [==============================] - 0s 53ms/step - loss: 11.2324 - mae: 11.7161
Epoch 26/100
8/8 [==============================] - 0s 53ms/step - loss: 10.8651 - mae: 11.3486
Epoch 27/100
8/8 [==============================] - 0s 52ms/step - loss: 10.5209 - mae: 11.0050
Epoch 28/100
8/8 [==============================] - 0s 53ms/step - loss: 10.1947 - mae: 10.6790
Epoch 29/100
8/8 [==============================] - 0s 53ms/step - loss: 9.8873 - mae: 10.3706
Epoch 30/100
8/8 [==============================] - 0s 51ms/step - loss: 9.5928 - mae: 10.0763
Epoch 31/100
8/8 [==============================] - 0s 57ms/step - loss: 9.3295 - mae: 9.8131
Epoch 32/100
8/8 [==============================] - 0s 52ms/step - loss: 9.0906 - mae: 9.5738
Epoch 33/100
8/8 [==============================] - 0s 56ms/step - loss: 8.8725 - mae: 9.3552
Epoch 34/100
8/8 [==============================] - 0s 53ms/step - loss: 8.6903 - mae: 9.1725
Epoch 35/100
8/8 [==============================] - 0s 50ms/step - loss: 8.5312 - mae: 9.0130
Epoch 36/100
8/8 [==============================] - 0s 53ms/step - loss: 8.3607 - mae: 8.8433
Epoch 37/100
8/8 [==============================] - 0s 51ms/step - loss: 8.2321 - mae: 8.7129
Epoch 38/100
8/8 [==============================] - 0s 55ms/step - loss: 8.0740 - mae: 8.5555
Epoch 39/100
8/8 [==============================] - 0s 55ms/step - loss: 7.9503 - mae: 8.4314
Epoch 40/100
8/8 [==============================] - 0s 54ms/step - loss: 8.4994 - mae: 8.9838
Epoch 41/100
8/8 [==============================] - 0s 52ms/step - loss: 8.9800 - mae: 9.4655
Epoch 42/100
8/8 [==============================] - 0s 52ms/step - loss: 8.7824 - mae: 9.2682
Epoch 43/100
8/8 [==============================] - 0s 51ms/step - loss: 9.2905 - mae: 9.7787
Epoch 44/100
8/8 [==============================] - 0s 53ms/step - loss: 9.8253 - mae: 10.3155
Epoch 45/100
8/8 [==============================] - 0s 53ms/step - loss: 10.0397 - mae: 10.5304
Epoch 46/100
8/8 [==============================] - 0s 54ms/step - loss: 9.1716 - mae: 9.6592
Epoch 47/100
8/8 [==============================] - 0s 58ms/step - loss: 10.7927 - mae: 11.2828
Epoch 48/100
8/8 [==============================] - 0s 53ms/step - loss: 21.4238 - mae: 21.9191
Epoch 49/100
8/8 [==============================] - 0s 53ms/step - loss: 36.3110 - mae: 36.8087
Epoch 50/100
8/8 [==============================] - 0s 57ms/step - loss: 26.7279 - mae: 27.2257
Epoch 51/100
8/8 [==============================] - 0s 52ms/step - loss: 27.6947 - mae: 28.1918
Epoch 52/100
8/8 [==============================] - 0s 53ms/step - loss: 19.7707 - mae: 20.2668
Epoch 53/100
8/8 [==============================] - 0s 49ms/step - loss: 13.0310 - mae: 13.5245
Epoch 54/100
8/8 [==============================] - 0s 52ms/step - loss: 17.6625 - mae: 18.1576
Epoch 55/100
8/8 [==============================] - 0s 54ms/step - loss: 18.9101 - mae: 19.4063
Epoch 56/100
8/8 [==============================] - 0s 55ms/step - loss: 11.7622 - mae: 12.2534
Epoch 57/100
8/8 [==============================] - 0s 57ms/step - loss: 11.2963 - mae: 11.7880
Epoch 58/100
8/8 [==============================] - 0s 53ms/step - loss: 13.5497 - mae: 14.0409
Epoch 59/100
8/8 [==============================] - 0s 52ms/step - loss: 25.7918 - mae: 26.2909
Epoch 60/100
8/8 [==============================] - 0s 53ms/step - loss: 21.7125 - mae: 22.2108
Epoch 61/100
8/8 [==============================] - 0s 53ms/step - loss: 22.6214 - mae: 23.1194
Epoch 62/100
8/8 [==============================] - 0s 56ms/step - loss: 23.5890 - mae: 24.0868
Epoch 63/100
8/8 [==============================] - 0s 55ms/step - loss: 23.4749 - mae: 23.9723
Epoch 64/100
8/8 [==============================] - 0s 52ms/step - loss: 24.4249 - mae: 24.9224
Epoch 65/100
8/8 [==============================] - 0s 54ms/step - loss: 31.2873 - mae: 31.7850
Epoch 66/100
8/8 [==============================] - 0s 54ms/step - loss: 57.8155 - mae: 58.3144
Epoch 67/100
8/8 [==============================] - 0s 51ms/step - loss: 60.5208 - mae: 61.0182
Epoch 68/100
8/8 [==============================] - 0s 53ms/step - loss: 104.7181 - mae: 105.2180
Epoch 69/100
8/8 [==============================] - 0s 54ms/step - loss: 39.7097 - mae: 40.2081
Epoch 70/100
8/8 [==============================] - 0s 59ms/step - loss: 28.7296 - mae: 29.2279
Epoch 71/100
8/8 [==============================] - 0s 50ms/step - loss: 31.3738 - mae: 31.8732
Epoch 72/100
8/8 [==============================] - 0s 52ms/step - loss: 32.1060 - mae: 32.6052
Epoch 73/100
8/8 [==============================] - 0s 52ms/step - loss: 34.0749 - mae: 34.5741
Epoch 74/100
8/8 [==============================] - 0s 56ms/step - loss: 35.8158 - mae: 36.3152
Epoch 75/100
8/8 [==============================] - 0s 53ms/step - loss: 37.0048 - mae: 37.5041
Epoch 76/100
8/8 [==============================] - 0s 51ms/step - loss: 37.9328 - mae: 38.4320
Epoch 77/100
8/8 [==============================] - 0s 52ms/step - loss: 40.2948 - mae: 40.7942
Epoch 78/100
8/8 [==============================] - 0s 53ms/step - loss: 42.8200 - mae: 43.3192
Epoch 79/100
8/8 [==============================] - 0s 55ms/step - loss: 43.5942 - mae: 44.0937
Epoch 80/100
8/8 [==============================] - 0s 54ms/step - loss: 46.2161 - mae: 46.7157
Epoch 81/100
8/8 [==============================] - 0s 54ms/step - loss: 45.2468 - mae: 45.7463
Epoch 82/100
8/8 [==============================] - 0s 48ms/step - loss: 47.8520 - mae: 48.3516
Epoch 83/100
8/8 [==============================] - 0s 50ms/step - loss: 48.0796 - mae: 48.5793
Epoch 84/100
8/8 [==============================] - 0s 50ms/step - loss: 49.8012 - mae: 50.3009
Epoch 85/100
8/8 [==============================] - 0s 52ms/step - loss: 51.7401 - mae: 52.2398
Epoch 86/100
8/8 [==============================] - 0s 55ms/step - loss: 52.7637 - mae: 53.2634
Epoch 87/100
8/8 [==============================] - 0s 54ms/step - loss: 53.7127 - mae: 54.2124
Epoch 88/100
8/8 [==============================] - 0s 55ms/step - loss: 52.5027 - mae: 53.0023
Epoch 89/100
8/8 [==============================] - 0s 53ms/step - loss: 59.4786 - mae: 59.9784
Epoch 90/100
8/8 [==============================] - 0s 53ms/step - loss: 58.9021 - mae: 59.4017
Epoch 91/100
8/8 [==============================] - 0s 55ms/step - loss: 62.9903 - mae: 63.4900
Epoch 92/100
8/8 [==============================] - 0s 52ms/step - loss: 63.3712 - mae: 63.8710
Epoch 93/100
8/8 [==============================] - 0s 53ms/step - loss: 81.0666 - mae: 81.5664
Epoch 94/100
8/8 [==============================] - 0s 54ms/step - loss: 92.1247 - mae: 92.6244
Epoch 95/100
8/8 [==============================] - 0s 57ms/step - loss: 93.2812 - mae: 93.7811
Epoch 96/100
8/8 [==============================] - 0s 54ms/step - loss: 92.5085 - mae: 93.0083
Epoch 97/100
8/8 [==============================] - 0s 51ms/step - loss: 86.3362 - mae: 86.8360
Epoch 98/100
8/8 [==============================] - 0s 54ms/step - loss: 102.3958 - mae: 102.8949
Epoch 99/100
8/8 [==============================] - 0s 52ms/step - loss: 66.6114 - mae: 67.1100
Epoch 100/100
8/8 [==============================] - 0s 54ms/step - loss: 163.3482 - mae: 163.8480
In [15]:
plt.semilogx(history.history["lr"], history.history["loss"])
plt.axis([1e-7, 1e-4, 0, 30])
Out[15]:
(1e-07, 0.0001, 0.0, 30.0)
In [16]:
keras.backend.clear_session()
tf.random.set_seed(42)
np.random.seed(42)

window_size = 30
train_set = seq2seq_window_dataset(x_train, window_size,
                                   batch_size=128)
valid_set = seq2seq_window_dataset(x_valid, window_size,
                                   batch_size=128)

model = keras.models.Sequential([
  keras.layers.SimpleRNN(100, return_sequences=True,
                         input_shape=[None, 1]),
  keras.layers.SimpleRNN(100, return_sequences=True),
  keras.layers.Dense(1),
  keras.layers.Lambda(lambda x: x * 200.0)
])
optimizer = keras.optimizers.SGD(lr=1e-6, momentum=0.9)
model.compile(loss=keras.losses.Huber(),
              optimizer=optimizer,
              metrics=["mae"])
early_stopping = keras.callbacks.EarlyStopping(patience=10)
model.fit(train_set, epochs=500,
          validation_data=valid_set,
          callbacks=[early_stopping])
Epoch 1/500
8/8 [==============================] - 1s 112ms/step - loss: 54.1428 - mae: 54.6413 - val_loss: 17.9209 - val_mae: 18.4158
Epoch 2/500
8/8 [==============================] - 1s 68ms/step - loss: 34.1721 - mae: 34.6694 - val_loss: 20.5482 - val_mae: 21.0441
Epoch 3/500
8/8 [==============================] - 1s 70ms/step - loss: 27.5074 - mae: 28.0027 - val_loss: 37.8713 - val_mae: 38.3707
Epoch 4/500
8/8 [==============================] - 1s 68ms/step - loss: 21.0646 - mae: 21.5578 - val_loss: 25.2685 - val_mae: 25.7675
Epoch 5/500
8/8 [==============================] - 1s 68ms/step - loss: 17.1768 - mae: 17.6667 - val_loss: 15.3555 - val_mae: 15.8505
Epoch 6/500
8/8 [==============================] - 1s 68ms/step - loss: 15.0382 - mae: 15.5247 - val_loss: 9.4217 - val_mae: 9.9070
Epoch 7/500
8/8 [==============================] - 1s 72ms/step - loss: 13.9508 - mae: 14.4372 - val_loss: 9.1617 - val_mae: 9.6474
Epoch 8/500
8/8 [==============================] - 1s 70ms/step - loss: 13.0593 - mae: 13.5433 - val_loss: 9.8610 - val_mae: 10.3506
Epoch 9/500
8/8 [==============================] - 1s 71ms/step - loss: 12.4298 - mae: 12.9131 - val_loss: 8.1729 - val_mae: 8.6572
Epoch 10/500
8/8 [==============================] - 1s 69ms/step - loss: 11.8905 - mae: 12.3737 - val_loss: 8.1780 - val_mae: 8.6608
Epoch 11/500
8/8 [==============================] - 1s 73ms/step - loss: 11.4247 - mae: 11.9069 - val_loss: 8.2508 - val_mae: 8.7357
Epoch 12/500
8/8 [==============================] - 1s 74ms/step - loss: 11.0519 - mae: 11.5337 - val_loss: 7.7045 - val_mae: 8.1899
Epoch 13/500
8/8 [==============================] - 1s 65ms/step - loss: 10.6913 - mae: 11.1735 - val_loss: 7.8013 - val_mae: 8.2838
Epoch 14/500
8/8 [==============================] - 1s 74ms/step - loss: 10.3653 - mae: 10.8473 - val_loss: 7.5620 - val_mae: 8.0454
Epoch 15/500
8/8 [==============================] - 1s 70ms/step - loss: 10.0908 - mae: 10.5734 - val_loss: 7.4425 - val_mae: 7.9259
Epoch 16/500
8/8 [==============================] - 1s 70ms/step - loss: 9.8478 - mae: 10.3300 - val_loss: 7.3019 - val_mae: 7.7855
Epoch 17/500
8/8 [==============================] - 1s 69ms/step - loss: 9.6206 - mae: 10.1028 - val_loss: 7.2305 - val_mae: 7.7128
Epoch 18/500
8/8 [==============================] - 1s 68ms/step - loss: 9.4222 - mae: 9.9045 - val_loss: 7.2973 - val_mae: 7.7811
Epoch 19/500
8/8 [==============================] - 1s 73ms/step - loss: 9.2415 - mae: 9.7235 - val_loss: 7.0410 - val_mae: 7.5227
Epoch 20/500
8/8 [==============================] - 1s 71ms/step - loss: 9.0733 - mae: 9.5550 - val_loss: 6.9231 - val_mae: 7.4054
Epoch 21/500
8/8 [==============================] - 1s 71ms/step - loss: 8.9267 - mae: 9.4083 - val_loss: 6.8684 - val_mae: 7.3498
Epoch 22/500
8/8 [==============================] - 1s 72ms/step - loss: 8.7896 - mae: 9.2713 - val_loss: 6.7681 - val_mae: 7.2486
Epoch 23/500
8/8 [==============================] - 1s 71ms/step - loss: 8.6424 - mae: 9.1235 - val_loss: 6.9130 - val_mae: 7.3982
Epoch 24/500
8/8 [==============================] - 1s 66ms/step - loss: 8.5131 - mae: 8.9936 - val_loss: 6.7497 - val_mae: 7.2318
Epoch 25/500
8/8 [==============================] - 1s 69ms/step - loss: 8.3955 - mae: 8.8765 - val_loss: 6.6384 - val_mae: 7.1198
Epoch 26/500
8/8 [==============================] - 1s 70ms/step - loss: 8.2890 - mae: 8.7705 - val_loss: 6.4466 - val_mae: 6.9236
Epoch 27/500
8/8 [==============================] - 1s 77ms/step - loss: 8.1784 - mae: 8.6594 - val_loss: 6.4514 - val_mae: 6.9306
Epoch 28/500
8/8 [==============================] - 1s 69ms/step - loss: 8.0964 - mae: 8.5775 - val_loss: 6.5569 - val_mae: 7.0406
Epoch 29/500
8/8 [==============================] - 1s 73ms/step - loss: 7.9966 - mae: 8.4773 - val_loss: 6.2728 - val_mae: 6.7472
Epoch 30/500
8/8 [==============================] - 1s 75ms/step - loss: 7.9174 - mae: 8.3981 - val_loss: 6.6966 - val_mae: 7.1815
Epoch 31/500
8/8 [==============================] - 1s 80ms/step - loss: 7.8317 - mae: 8.3122 - val_loss: 6.1268 - val_mae: 6.6057
Epoch 32/500
8/8 [==============================] - 1s 75ms/step - loss: 7.7520 - mae: 8.2328 - val_loss: 6.6856 - val_mae: 7.1703
Epoch 33/500
8/8 [==============================] - 1s 72ms/step - loss: 7.6775 - mae: 8.1583 - val_loss: 6.0753 - val_mae: 6.5512
Epoch 34/500
8/8 [==============================] - 1s 77ms/step - loss: 7.5826 - mae: 8.0634 - val_loss: 6.3348 - val_mae: 6.8185
Epoch 35/500
8/8 [==============================] - 1s 77ms/step - loss: 7.4951 - mae: 7.9749 - val_loss: 6.0378 - val_mae: 6.5139
Epoch 36/500
8/8 [==============================] - 1s 75ms/step - loss: 7.4325 - mae: 7.9133 - val_loss: 6.1865 - val_mae: 6.6674
Epoch 37/500
8/8 [==============================] - 1s 72ms/step - loss: 7.3559 - mae: 7.8373 - val_loss: 6.0785 - val_mae: 6.5581
Epoch 38/500
8/8 [==============================] - 1s 69ms/step - loss: 7.2913 - mae: 7.7732 - val_loss: 6.0684 - val_mae: 6.5479
Epoch 39/500
8/8 [==============================] - 1s 70ms/step - loss: 7.2333 - mae: 7.7151 - val_loss: 6.1053 - val_mae: 6.5854
Epoch 40/500
8/8 [==============================] - 1s 75ms/step - loss: 7.1725 - mae: 7.6538 - val_loss: 5.9208 - val_mae: 6.3990
Epoch 41/500
8/8 [==============================] - 1s 82ms/step - loss: 7.1129 - mae: 7.5941 - val_loss: 5.9483 - val_mae: 6.4270
Epoch 42/500
8/8 [==============================] - 1s 69ms/step - loss: 7.0587 - mae: 7.5404 - val_loss: 5.9384 - val_mae: 6.4174
Epoch 43/500
8/8 [==============================] - 1s 73ms/step - loss: 7.0068 - mae: 7.4883 - val_loss: 5.8196 - val_mae: 6.2968
Epoch 44/500
8/8 [==============================] - 1s 71ms/step - loss: 6.9536 - mae: 7.4349 - val_loss: 5.9777 - val_mae: 6.4585
Epoch 45/500
8/8 [==============================] - 1s 70ms/step - loss: 6.9029 - mae: 7.3843 - val_loss: 5.8452 - val_mae: 6.3236
Epoch 46/500
8/8 [==============================] - 1s 66ms/step - loss: 6.8559 - mae: 7.3373 - val_loss: 5.9745 - val_mae: 6.4567
Epoch 47/500
8/8 [==============================] - 1s 71ms/step - loss: 6.8096 - mae: 7.2905 - val_loss: 5.6930 - val_mae: 6.1683
Epoch 48/500
8/8 [==============================] - 1s 70ms/step - loss: 6.7795 - mae: 7.2602 - val_loss: 5.8916 - val_mae: 6.3713
Epoch 49/500
8/8 [==============================] - 1s 73ms/step - loss: 6.7324 - mae: 7.2131 - val_loss: 5.7787 - val_mae: 6.2573
Epoch 50/500
8/8 [==============================] - 1s 70ms/step - loss: 6.6880 - mae: 7.1684 - val_loss: 5.7283 - val_mae: 6.2063
Epoch 51/500
8/8 [==============================] - 1s 70ms/step - loss: 6.6452 - mae: 7.1259 - val_loss: 5.7470 - val_mae: 6.2248
Epoch 52/500
8/8 [==============================] - 1s 72ms/step - loss: 6.6150 - mae: 7.0959 - val_loss: 5.7311 - val_mae: 6.2093
Epoch 53/500
8/8 [==============================] - 1s 67ms/step - loss: 6.5714 - mae: 7.0514 - val_loss: 5.6912 - val_mae: 6.1686
Epoch 54/500
8/8 [==============================] - 1s 69ms/step - loss: 6.5348 - mae: 7.0153 - val_loss: 5.6069 - val_mae: 6.0830
Epoch 55/500
8/8 [==============================] - 1s 73ms/step - loss: 6.5013 - mae: 6.9819 - val_loss: 5.7314 - val_mae: 6.2103
Epoch 56/500
8/8 [==============================] - 1s 69ms/step - loss: 6.4726 - mae: 6.9530 - val_loss: 5.6089 - val_mae: 6.0850
Epoch 57/500
8/8 [==============================] - 1s 66ms/step - loss: 6.4448 - mae: 6.9249 - val_loss: 5.7441 - val_mae: 6.2233
Epoch 58/500
8/8 [==============================] - 1s 69ms/step - loss: 6.4214 - mae: 6.9012 - val_loss: 5.5397 - val_mae: 6.0159
Epoch 59/500
8/8 [==============================] - 1s 67ms/step - loss: 6.3940 - mae: 6.8735 - val_loss: 5.5486 - val_mae: 6.0250
Epoch 60/500
8/8 [==============================] - 1s 71ms/step - loss: 6.3687 - mae: 6.8481 - val_loss: 5.6414 - val_mae: 6.1190
Epoch 61/500
8/8 [==============================] - 1s 75ms/step - loss: 6.3471 - mae: 6.8263 - val_loss: 5.4949 - val_mae: 5.9711
Epoch 62/500
8/8 [==============================] - 1s 71ms/step - loss: 6.3239 - mae: 6.8034 - val_loss: 5.6962 - val_mae: 6.1758
Epoch 63/500
8/8 [==============================] - 1s 69ms/step - loss: 6.3027 - mae: 6.7813 - val_loss: 5.4634 - val_mae: 5.9396
Epoch 64/500
8/8 [==============================] - 1s 68ms/step - loss: 6.2800 - mae: 6.7595 - val_loss: 5.6596 - val_mae: 6.1384
Epoch 65/500
8/8 [==============================] - 1s 68ms/step - loss: 6.2597 - mae: 6.7382 - val_loss: 5.4920 - val_mae: 5.9685
Epoch 66/500
8/8 [==============================] - 1s 73ms/step - loss: 6.2370 - mae: 6.7161 - val_loss: 5.5227 - val_mae: 5.9986
Epoch 67/500
8/8 [==============================] - 1s 71ms/step - loss: 6.2181 - mae: 6.6968 - val_loss: 5.5079 - val_mae: 5.9838
Epoch 68/500
8/8 [==============================] - 1s 72ms/step - loss: 6.1976 - mae: 6.6767 - val_loss: 5.5153 - val_mae: 5.9910
Epoch 69/500
8/8 [==============================] - 1s 69ms/step - loss: 6.1796 - mae: 6.6588 - val_loss: 5.5412 - val_mae: 6.0176
Epoch 70/500
8/8 [==============================] - 1s 71ms/step - loss: 6.1641 - mae: 6.6427 - val_loss: 5.3948 - val_mae: 5.8713
Epoch 71/500
8/8 [==============================] - 1s 71ms/step - loss: 6.1562 - mae: 6.6361 - val_loss: 5.5790 - val_mae: 6.0581
Epoch 72/500
8/8 [==============================] - 1s 68ms/step - loss: 6.1331 - mae: 6.6120 - val_loss: 5.4278 - val_mae: 5.9045
Epoch 73/500
8/8 [==============================] - 1s 67ms/step - loss: 6.1158 - mae: 6.5956 - val_loss: 5.5849 - val_mae: 6.0648
Epoch 74/500
8/8 [==============================] - 1s 68ms/step - loss: 6.0946 - mae: 6.5735 - val_loss: 5.4474 - val_mae: 5.9229
Epoch 75/500
8/8 [==============================] - 1s 71ms/step - loss: 6.0770 - mae: 6.5565 - val_loss: 5.5412 - val_mae: 6.0205
Epoch 76/500
8/8 [==============================] - 1s 69ms/step - loss: 6.0624 - mae: 6.5409 - val_loss: 5.4268 - val_mae: 5.9025
Epoch 77/500
8/8 [==============================] - 1s 73ms/step - loss: 6.0439 - mae: 6.5231 - val_loss: 5.4290 - val_mae: 5.9047
Epoch 78/500
8/8 [==============================] - 1s 69ms/step - loss: 6.0280 - mae: 6.5074 - val_loss: 5.4693 - val_mae: 5.9471
Epoch 79/500
8/8 [==============================] - 1s 71ms/step - loss: 6.0133 - mae: 6.4930 - val_loss: 5.4171 - val_mae: 5.8927
Epoch 80/500
8/8 [==============================] - 1s 70ms/step - loss: 5.9990 - mae: 6.4783 - val_loss: 5.5149 - val_mae: 5.9946
Out[16]:
<tensorflow.python.keras.callbacks.History at 0x7f78650a6b00>
In [17]:
rnn_forecast = model_forecast(model, series[..., np.newaxis], window_size)
rnn_forecast = rnn_forecast[split_time - window_size:-1, -1, 0]
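
Because this model predicts a value at every time step of every window, model_forecast returns an array of shape [num_windows, window_size, 1]. Keeping index -1 on the time axis retains only the prediction made from each full window, and the leading slice aligns those predictions with the validation period. A hedged check (not in the original notebook) that the result matches x_valid:

# The sliced seq2seq forecast should contain one value per validation step.
print(rnn_forecast.shape, x_valid.shape)
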
In [18]:
plt.figure(figsize=(10, 6))
plot_series(time_valid, x_valid)
plot_series(time_valid, rnn_forecast)
In [19]:
keras.metrics.mean_absolute_error(x_valid, rnn_forecast).numpy()
Out[19]:
5.4588695